diff --git a/Dockerfile b/Dockerfile
index 7be75a98..24060626 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -22,7 +22,4 @@ COPY core ./core
 COPY app.py .
 COPY config.py .

-# run database migrations
-RUN alembic upgrade head
-
 CMD ["flask", "run", "--host=0.0.0.0", "--port=5000"]
\ No newline at end of file
diff --git a/alembic/versions/72ac2b020c7c_delete_dataset_readme_table.py b/alembic/versions/72ac2b020c7c_delete_dataset_readme_table.py
deleted file mode 100644
index f2ea30f3..00000000
--- a/alembic/versions/72ac2b020c7c_delete_dataset_readme_table.py
+++ /dev/null
@@ -1,22 +0,0 @@
-"""delete dataset readme table
-
-Revision ID: 72ac2b020c7c
-Revises:
-Create Date: 2023-11-08 15:47:00.205940
-
-"""
-from typing import Sequence, Union
-
-import alembic
-import sqlalchemy as sa
-
-
-# revision identifiers, used by Alembic.
-revision: str = "72ac2b020c7c"
-down_revision: Union[str, None] = None
-branch_labels: Union[str, Sequence[str], None] = None
-depends_on: Union[str, Sequence[str], None] = None
-
-
-def upgrade() -> None:
-    alembic.op.drop_table("dataset_readme")
diff --git a/apis/__init__.py b/apis/__init__.py
index bf2e0873..bfb5d0c0 100644
--- a/apis/__init__.py
+++ b/apis/__init__.py
@@ -16,6 +16,7 @@
 from .dataset_metadata.dataset_description import api as description
 from .dataset_metadata.dataset_funder import api as funder
 from .dataset_metadata.dataset_other import api as dataset_other
+from .dataset_metadata.dataset_readme import api as readme
 from .dataset_metadata.dataset_record_keys import api as record_keys
 from .dataset_metadata.dataset_related_item import api as related_item
 from .dataset_metadata.dataset_rights import api as rights
@@ -62,6 +63,7 @@
     "description",
     "funder",
     "dataset_other",
+    "readme",
     "record_keys",
     "related_item",
     "api",
diff --git a/apis/dataset.py b/apis/dataset.py
index ff933fda..dc609a75 100644
--- a/apis/dataset.py
+++ b/apis/dataset.py
@@ -1,6 +1,6 @@
 import typing

-from flask import jsonify, request
+from flask import Response, jsonify, request
 from flask_restx import Namespace, Resource, fields

 import model
@@ -32,8 +32,6 @@
         "created_at": fields.String(required=True),
         "dataset_versions": fields.Nested(dataset_versions_model, required=True),
         "latest_version": fields.String(required=True),
-        "title": fields.String(required=True),
-        "description": fields.String(required=True),
     },
 )

@@ -43,7 +41,6 @@ class DatasetList(Resource):
     @api.response(201, "Success")
     @api.response(400, "Validation Error")
     @api.marshal_with(dataset)
-    @api.doc("view datasets")
     def get(self, study_id):
         study = model.Study.query.get(study_id)
         datasets = model.Dataset.query.filter_by(study=study)
@@ -51,12 +48,13 @@ def get(self, study_id):

     @api.response(201, "Success")
     @api.response(400, "Validation Error")
-    @api.doc("add datasets")
+    @api.doc("update dataset")
     @api.expect(dataset)
     def post(self, study_id):
         study = model.Study.query.get(study_id)
         if not is_granted("add_dataset", study):
             return "Access denied, you can not modify", 403
+        # TODO: throw an error if study.participant_id belongs to a different study
         data: typing.Union[typing.Any, dict] = request.json
         dataset_ = model.Dataset.from_data(study)
         model.db.session.add(dataset_)
@@ -79,7 +77,6 @@ def post(self, study_id):
 @api.route("/study/<study_id>/dataset/<dataset_id>")
 @api.response(201, "Success")
 @api.response(400, "Validation Error")
-@api.doc("view dataset")
 class DatasetResource(Resource):
     @api.response(201, "Success")
     @api.response(400, "Validation Error")
@@ -89,7 +86,6 @@ def get(self, study_id: int, dataset_id: int):  # pylint: disable= unused-argument

     @api.response(201, "Success")
     @api.response(400, "Validation Error")
-    @api.doc("update dataset")
     def put(self, study_id: int, dataset_id: int):
         study = model.Study.query.get(study_id)

@@ -106,20 +102,26 @@ def put(self, study_id: int, dataset_id: int):

     @api.response(200, "Success")
     @api.response(400, "Validation Error")
-    @api.doc("delete dataset")
     def delete(self, study_id: int, dataset_id: int):
         study = model.Study.query.get(study_id)
         if not is_granted("delete_dataset", study):
             return "Access denied, you can not modify", 403
-
         data_obj = model.Dataset.query.get(dataset_id)
         for version in data_obj.dataset_versions:
             model.db.session.delete(version)
-
         model.db.session.delete(data_obj)
         model.db.session.commit()
         return 204

+    # def delete(self, study_id: int, dataset_id: int, version_id: int):
+    #     data_obj = Dataset.query.get(dataset_id)
+    #     for version in data_obj.dataset_versions:
+    #         db.session.delete(version)
+    #     db.session.commit()
+    #     db.session.delete(data_obj)
+    #     db.session.commit()
+    #     return Response(status=204)
+

 @api.route("/study/<study_id>/dataset/<dataset_id>/version/<version_id>")
 class VersionResource(Resource):
@@ -129,15 +131,9 @@ class VersionResource(Resource):
     def get(
         self, study_id: int, dataset_id: int, version_id: int
     ):  # pylint: disable= unused-argument
-        study = model.Study.query.get(study_id)
-        if not is_granted("version", study):
-            return "Access denied, you can not modify", 403
         dataset_version = model.Version.query.get(version_id)
         return dataset_version.to_dict()

-    @api.response(201, "Success")
-    @api.response(400, "Validation Error")
-    @api.doc("update dataset version")
     def put(
         self, study_id: int, dataset_id: int, version_id: int
     ):  # pylint: disable= unused-argument
@@ -149,86 +145,26 @@ def put(
         model.db.session.commit()
         return jsonify(data_version_obj.to_dict()), 201

-    @api.response(201, "Success")
-    @api.response(400, "Validation Error")
-    @api.doc("delete dataset version")
     def delete(
         self, study_id: int, dataset_id: int, version_id: int
     ):  # pylint: disable= unused-argument
         study = model.Study.query.get(study_id)
         if not is_granted("delete_dataset", study):
             return "Access denied, you can not modify", 403
-        version_obj = model.Version.query.get(version_id)
-        model.db.session.delete(version_obj)
-        model.db.session.commit()
-        return 204
-
-
-@api.route("/study/<study_id>/dataset/<dataset_id>/version/<version_id>/changelog")
-class VersionDatasetChangelog(Resource):
-    @api.response(201, "Success")
-    @api.response(400, "Validation Error")
-    @api.doc("version changelog")
-    def get(self, study_id: str, dataset_id: str, version_id: str):
-        study = model.Study.query.get(study_id)
-        if not is_granted("version", study):
-            return "Access denied, you can not modify", 403
-        version = model.Version.query.filter_by(
-            id=version_id, dataset_id=dataset_id
-        ).one_or_none()
-        return {"changelog": version.changelog}
-
-    @api.response(201, "Success")
-    @api.response(400, "Validation Error")
-    @api.doc("version changelog update")
-    def put(
-        self, study_id: str, dataset_id: str, version_id: str
-    ):  # pylint: disable= unused-argument
-        study = model.Study.query.get(study_id)
-        if not is_granted("version", study):
-            return "Access denied, you can not modify", 403
-        data: typing.Union[typing.Any, dict] = request.json
-        version_ = model.Version.query.get(version_id)
-        version_.changelog = data["changelog"]
-        model.db.session.commit()
-        return 201
-
-
-@api.route("/study/<study_id>/dataset/<dataset_id>/version/<version_id>/readme")
-class VersionDatasetReadme(Resource):
-    @api.response(201, "Success")
-    @api.response(400, "Validation Error")
-    @api.doc("version readme")
-    def get(self, study_id: str, dataset_id: str, version_id: str):
-        study = model.Study.query.get(study_id)
-        if not is_granted("version", study):
-            return "Access denied, you can not modify", 403
-        version = model.Version.query.filter_by(
-            id=version_id, dataset_id=dataset_id
-        ).one_or_none()
-        return version.version_readme.to_dict(), 200
-
-    @api.response(201, "Success")
-    @api.response(400, "Validation Error")
-    @api.doc("version readme update")
-    def put(
-        self, study_id: str, dataset_id: str, version_id: str
-    ):  # pylint: disable= unused-argument
-        study = model.Study.query.get(study_id)
-        if not is_granted("version", study):
-            return "Access denied, you can not modify", 403
-        data = request.json
-        version_ = model.Version.query.get(version_id)
-        version_.version_readme.update(data)
+        data_obj = model.Dataset.query.get(dataset_id)
+        for version in data_obj.dataset_versions:
+            model.db.session.delete(version)
+            model.db.session.commit()
+        model.db.session.delete(data_obj)
         model.db.session.commit()
-        return 201
+        return Response(status=204)


 @api.route("/study/<study_id>/dataset/<dataset_id>/version")
 class VersionList(Resource):
     @api.response(201, "Success")
     @api.response(400, "Validation Error")
-    @api.doc("view versions")
+    @api.doc("versions")
     def get(self, study_id: int, dataset_id: int):
         study = model.Study.query.get(study_id)
         if not is_granted("version", study):
@@ -266,37 +202,21 @@ def post(self, study_id: int, dataset_id: int):

 # return "Access denied, you can not modify", 403
 # data_obj = model.Version.query.get(version_id)
 # data: typing.Union[typing.Any, dict] = request.json
+# dataset_versions = model.Version.from_data(data_obj, data)
 # model.db.session.commit()
 # return dataset_versions.to_dict()

-@api.route("/study/<study_id>/dataset/<dataset_id>/version/<version_id>/study-metadata")
-class VersionDatasetMetadataResource(Resource):
-    @api.response(201, "Success")
-    @api.response(400, "Validation Error")
-    @api.doc("version study metadata get")
-    def get(self, study_id: str, dataset_id: str, version_id: str):
-        study = model.Study.query.get(study_id)
-        if not is_granted("version", study):
-            return "Access denied, you can not modify", 403
-        version = model.Version.query.filter_by(
-            id=version_id, dataset_id=dataset_id
-        ).one_or_none()
-        return version.dataset.study.to_dict_study_metadata()
-
-
-@api.route(
-    "/study/<study_id>/dataset/<dataset_id>/version/<version_id>/dataset-metadata"
-)
-class VersionStudyMetadataResource(Resource):
-    @api.response(201, "Success")
-    @api.response(400, "Validation Error")
-    @api.doc("version dataset metadata get")
-    def get(self, study_id: str, dataset_id: str, version_id: str):
-        study = model.Study.query.get(study_id)
-        if not is_granted("version", study):
-            return "Access denied, you can not modify", 403
-        version = model.Version.query.filter_by(
-            id=version_id, dataset_id=dataset_id
-        ).one_or_none()
-        return version.dataset.to_dict_dataset_metadata()
+#
+# @api.route("/study/<study_id>/dataset/<dataset_id>
+# /version/<version_id>/dataset-metadata")
+# class VersionStudyMetadataResource(Resource):
+#     @api.response(201, "Success")
+#     @api.response(400, "Validation Error")
+#     @api.doc("version dataset metadata get")
+#     def get(self, study_id: int, dataset_id: int, version_id):
+#         study = model.Study.query.get(study_id)
+#         if not is_granted("dataset", study):
+#             return "Access denied, you can not modify", 403
+#         version = dataset.dataset_version.get(version_id)
+#         pass
diff --git a/apis/dataset_metadata/dataset_readme.py b/apis/dataset_metadata/dataset_readme.py
new file mode 100644
index 00000000..edd1e0ad
--- /dev/null
+++ 
b/apis/dataset_metadata/dataset_readme.py @@ -0,0 +1,33 @@ +from flask import request +from flask_restx import Resource, fields + +import model +from apis.authentication import is_granted +from apis.dataset_metadata_namespace import api + +dataset_readme = api.model( + "DatasetReadme", + {"id": fields.String(required=True), "content": fields.String(required=True)}, +) + + +@api.route("/study//dataset//metadata/readme") +class DatasetReadmeResource(Resource): + @api.doc("readme") + @api.response(200, "Success") + @api.response(400, "Validation Error") + @api.marshal_with(dataset_readme) + def get(self, study_id: int, dataset_id: int): # pylint: disable= unused-argument + dataset_ = model.Dataset.query.get(dataset_id) + dataset_readme_ = dataset_.dataset_readme + return dataset_readme_.to_dict() + + def put(self, study_id: int, dataset_id: int): + study_obj = model.Study.query.get(study_id) + if not is_granted("dataset_metadata", study_obj): + return "Access denied, you can not make any change in dataset metadata", 403 + data = request.json + dataset_ = model.Dataset.query.get(dataset_id) + dataset_.dataset_readme.update(data) + model.db.session.commit() + return dataset_.dataset_readme.to_dict() diff --git a/apis/dataset_metadata/dataset_related_item.py b/apis/dataset_metadata/dataset_related_item.py index da3defdd..df5bca5b 100644 --- a/apis/dataset_metadata/dataset_related_item.py +++ b/apis/dataset_metadata/dataset_related_item.py @@ -160,6 +160,7 @@ def post(self, study_id: int, dataset_id: int): "Subtitle", "TranslatedTitle", "OtherTitle", + "MainTitle", ], }, }, diff --git a/apis/dataset_metadata/dataset_title.py b/apis/dataset_metadata/dataset_title.py index 23793e7e..b3d245c8 100644 --- a/apis/dataset_metadata/dataset_title.py +++ b/apis/dataset_metadata/dataset_title.py @@ -63,6 +63,7 @@ def post(self, study_id: int, dataset_id: int): "Subtitle", "TranslatedTitle", "OtherTitle", + "MainTitle", ], }, }, @@ -82,12 +83,17 @@ def post(self, study_id: int, dataset_id: int): for i in data: if "id" in i and i["id"]: dataset_title_ = model.DatasetTitle.query.get(i["id"]) + # if dataset_title_.type == "MainTitle": + # return ( + # "Main Title type can not be modified", + # 403, + # dataset_title_.update(i) list_of_elements.append(dataset_title_.to_dict()) elif "id" not in i or not i["id"]: if i["type"] == "MainTitle": return ( - "Main Title type can not be given", + "MainTitle type can not be given", 403, ) dataset_title_ = model.DatasetTitle.from_data(data_obj, i) @@ -119,7 +125,7 @@ def delete( dataset_title_ = model.DatasetTitle.query.get(title_id) if dataset_title_.type == "MainTitle": return ( - "Main Title type can not be deleted", + "MainTitle type can not be deleted", 403, ) model.db.session.delete(dataset_title_) diff --git a/apis/study_metadata/study_design.py b/apis/study_metadata/study_design.py index ebadf2e6..0b8fd32c 100644 --- a/apis/study_metadata/study_design.py +++ b/apis/study_metadata/study_design.py @@ -13,6 +13,7 @@ study_design = api.model( "StudyDesign", { + "id": fields.String(required=True), "design_allocation": fields.String(required=True), "study_type": fields.String(required=True), "design_intervention_model": fields.String(required=True), @@ -73,7 +74,7 @@ def put(self, study_id: int): "type": ["string", "null"], }, "design_who_masked_list": { - "type": ["array", "null"], + "type": "array", "items": { "type": "string", "oneOf": [ @@ -90,7 +91,7 @@ def put(self, study_id: int): "uniqueItems": True, }, "phase_list": { - "type": ["array", "null"], + "type": 
"array", "items": { "type": "string", "oneOf": [ @@ -110,9 +111,9 @@ def put(self, study_id: int): }, "uniqueItems": True, }, - "enrollment_count": {"type": ["integer", "null"]}, + "enrollment_count": {"type": "integer"}, "enrollment_type": { - "type": ["string", "null"], + "type": "string", "enum": ["Actual", "Anticipated"], }, "number_arms": {"type": ["integer", "null"]}, @@ -127,7 +128,7 @@ def put(self, study_id: int): "Case-Control", "Case-Only", "Case-Crossover", - "Ecologic or Community", + "Ecologic or Community Study", "Family-Based", "Other", ] @@ -154,8 +155,12 @@ def put(self, study_id: int): "uniqueItems": True, }, "bio_spec_retention": {"type": ["string", "null"]}, - "bio_spec_description": {"type": ["string", "null"]}, - "target_duration": {"type": ["string", "null"]}, + "bio_spec_description": { + "type": ["string", "null"], + }, + "target_duration": { + "type": ["string", "null"], + }, "number_groups_cohorts": {"type": ["integer", "null"]}, }, } diff --git a/apis/study_metadata/study_other.py b/apis/study_metadata/study_other.py index fd04de4a..9030641c 100644 --- a/apis/study_metadata/study_other.py +++ b/apis/study_metadata/study_other.py @@ -91,7 +91,8 @@ def get(self, study_id: int): study_ = model.Study.query.get(study_id) study_oversight_has_dmc = study_.study_other.oversight_has_dmc - return {"oversight": study_oversight_has_dmc} + + return study_oversight_has_dmc def put(self, study_id: int): """Update study oversight metadata""" diff --git a/model/__init__.py b/model/__init__.py index 77f8ffeb..5b91a29a 100644 --- a/model/__init__.py +++ b/model/__init__.py @@ -18,6 +18,7 @@ from .dataset_metadata.dataset_description import DatasetDescription from .dataset_metadata.dataset_funder import DatasetFunder from .dataset_metadata.dataset_other import DatasetOther +from .dataset_metadata.dataset_readme import DatasetReadme from .dataset_metadata.dataset_record_keys import DatasetRecordKeys from .dataset_metadata.dataset_rights import DatasetRights from .dataset_metadata.dataset_subject import DatasetSubject @@ -70,6 +71,7 @@ "DatasetFunder", "DatasetAlternateIdentifier", "DatasetRights", + "DatasetReadme", "DatasetRecordKeys", "DatasetTitle", "DatasetSubject", diff --git a/model/dataset.py b/model/dataset.py index 96bc4d41..98cdc078 100644 --- a/model/dataset.py +++ b/model/dataset.py @@ -20,6 +20,7 @@ def __init__(self, study): self.dataset_record_keys = model.DatasetRecordKeys(self) self.dataset_de_ident_level = model.DatasetDeIdentLevel(self) self.dataset_consent = model.DatasetConsent(self) + self.dataset_readme = model.DatasetReadme(self) self.dataset_other = model.DatasetOther(self) self.dataset_title.append(model.DatasetTitle(self)) @@ -88,6 +89,9 @@ def __init__(self, study): dataset_other = db.relationship( "DatasetOther", back_populates="dataset", uselist=False, cascade="all, delete" ) + dataset_readme = db.relationship( + "DatasetReadme", back_populates="dataset", uselist=False, cascade="all, delete" + ) dataset_record_keys = db.relationship( "DatasetRecordKeys", back_populates="dataset", @@ -109,61 +113,14 @@ def __init__(self, study): def to_dict(self): last_published = self.last_published() + # last_modified = self.last_modified() + return { "id": self.id, "created_at": self.created_at, # "dataset_versions": [i.to_dict() for i in self.dataset_versions], "latest_version": last_published.id if last_published else None, - "title": [ - i.title if i.title else None for i in self.dataset_title # type: ignore - ][0], - "description": [ - i.description if i.type == 
"Abstract" else None - for i in self.dataset_description # type: ignore - ][0], - } - - def to_dict_dataset_metadata(self): - return { - "contributors": [ - i.to_dict_metadata() - for i in self.dataset_contributors # type: ignore - if not i.creator - ], - "about": self.dataset_other.to_dict_metadata(), - "publisher": self.dataset_other.to_dict_publisher(), # type: ignore - "access": self.dataset_access.to_dict_metadata(), - "consent": self.dataset_consent.to_dict_metadata(), - "dates": [i.to_dict_metadata() for i in self.dataset_date], # type: ignore - "de_identification": self.dataset_de_ident_level.to_dict_metadata(), - "descriptions": [ - i.to_dict_metadata() for i in self.dataset_description # type: ignore - ], - "funders": [ - i.to_dict_metadata() for i in self.dataset_funder # type: ignore - ], - "identifiers": [ - i.to_dict_metadata() - for i in self.dataset_alternate_identifier # type: ignore - ], - "creators": [ - i.to_dict_metadata() - for i in self.dataset_contributors # type: ignore - if i.creator - ], - "record_keys": self.dataset_record_keys.to_dict_metadata(), - "related_items": [ - i.to_dict_metadata() for i in self.dataset_related_item # type: ignore - ], - "rights": [ - i.to_dict_metadata() for i in self.dataset_rights # type: ignore - ], - "subjects": [ - i.to_dict_metadata() for i in self.dataset_subject # type: ignore - ], - "titles": [ - i.to_dict_metadata() for i in self.dataset_title # type: ignore - ], + # "title": self.dataset_title.title if self.dataset_title else "" } def last_published(self): diff --git a/model/dataset_metadata/dataset_access.py b/model/dataset_metadata/dataset_access.py index 7ef72ffb..577a5e14 100644 --- a/model/dataset_metadata/dataset_access.py +++ b/model/dataset_metadata/dataset_access.py @@ -30,12 +30,6 @@ def to_dict(self): "url": self.url, } - def to_dict_metadata(self): - return { - "type": self.type, - "description": self.description, - } - @staticmethod def from_data(dataset: Dataset, data: dict): dataset_access = DatasetAccess(dataset) diff --git a/model/dataset_metadata/dataset_alternate_identifier.py b/model/dataset_metadata/dataset_alternate_identifier.py index 7eba2e98..1b2ebb70 100644 --- a/model/dataset_metadata/dataset_alternate_identifier.py +++ b/model/dataset_metadata/dataset_alternate_identifier.py @@ -28,13 +28,6 @@ def to_dict(self): "created_at": self.created_at, } - def to_dict_metadata(self): - return { - "id": self.id, - "type": self.type, - "identifier": self.identifier, - } - @staticmethod def from_data(dataset, data: dict): dataset_date = DatasetAlternateIdentifier(dataset) diff --git a/model/dataset_metadata/dataset_consent.py b/model/dataset_metadata/dataset_consent.py index e7ea1cd3..2258ea5d 100644 --- a/model/dataset_metadata/dataset_consent.py +++ b/model/dataset_metadata/dataset_consent.py @@ -38,13 +38,6 @@ def to_dict(self): "details": self.details, } - def to_dict_metadata(self): - return { - "noncommercial": self.noncommercial, - "geog_restrict": self.geog_restrict, - "research_type": self.research_type, - } - @staticmethod def from_data(dataset, data: dict): dataset_consent = DatasetConsent(dataset) diff --git a/model/dataset_metadata/dataset_contributor.py b/model/dataset_metadata/dataset_contributor.py index 2e14d02a..6d4f54d7 100644 --- a/model/dataset_metadata/dataset_contributor.py +++ b/model/dataset_metadata/dataset_contributor.py @@ -40,15 +40,6 @@ def to_dict(self): "created_at": self.created_at, } - def to_dict_metadata(self): - return { - "id": self.id, - "name": self.name, - "name_type": 
self.name_identifier, - "contributor_type": self.contributor_type, - "creator": self.creator, - } - @staticmethod def from_data(dataset, data: dict): dataset_contributor = DatasetContributor(dataset) diff --git a/model/dataset_metadata/dataset_date.py b/model/dataset_metadata/dataset_date.py index f1da513c..c58a1741 100644 --- a/model/dataset_metadata/dataset_date.py +++ b/model/dataset_metadata/dataset_date.py @@ -31,16 +31,6 @@ def to_dict(self): "created_at": self.created_at, } - def to_dict_metadata(self): - bigint_timestamp = self.date - unix_timestamp = bigint_timestamp / 1000 - datetime_obj = datetime.datetime.utcfromtimestamp(unix_timestamp) - return { - "id": self.id, - "date": datetime_obj.strftime("%m-%d-%Y"), - "type": self.type, - } - @staticmethod def from_data(dataset, data: dict): dataset_date = DatasetDate(dataset) diff --git a/model/dataset_metadata/dataset_de_ident_level.py b/model/dataset_metadata/dataset_de_ident_level.py index b5acccc4..4d8e1cc1 100644 --- a/model/dataset_metadata/dataset_de_ident_level.py +++ b/model/dataset_metadata/dataset_de_ident_level.py @@ -38,12 +38,6 @@ def to_dict(self): "details": self.details, } - def to_dict_metadata(self): - return { - "direct": self.direct, - "type": self.type, - } - @staticmethod def from_data(dataset, data: dict): dataset_de_ident_level = DatasetDeIdentLevel(dataset) diff --git a/model/dataset_metadata/dataset_description.py b/model/dataset_metadata/dataset_description.py index 6660e1f9..97f3a8a2 100644 --- a/model/dataset_metadata/dataset_description.py +++ b/model/dataset_metadata/dataset_description.py @@ -30,13 +30,6 @@ def to_dict(self): "created_at": self.created_at, } - def to_dict_metadata(self): - return { - "id": self.id, - "description": self.description, - "type": self.type, - } - @staticmethod def from_data(dataset, data: dict): dataset_description = DatasetDescription(dataset) diff --git a/model/dataset_metadata/dataset_funder.py b/model/dataset_metadata/dataset_funder.py index 061e7d31..90c45551 100644 --- a/model/dataset_metadata/dataset_funder.py +++ b/model/dataset_metadata/dataset_funder.py @@ -38,13 +38,6 @@ def to_dict(self): "created_at": self.created_at, } - def to_dict_metadata(self): - return { - "id": self.id, - "name": self.name, - "identifier": self.identifier, - } - @staticmethod def from_data(dataset, data: dict): dataset_funder = DatasetFunder(dataset) diff --git a/model/dataset_metadata/dataset_other.py b/model/dataset_metadata/dataset_other.py index 0225972b..5a96918f 100644 --- a/model/dataset_metadata/dataset_other.py +++ b/model/dataset_metadata/dataset_other.py @@ -44,20 +44,6 @@ def to_dict(self): "resource_type": self.resource_type, } - def to_dict_metadata(self): - return { - "language": self.language, - "size": self.size, - "resource_type": self.resource_type, - } - - def to_dict_publisher(self): - return { - "managing_organization_name": self.managing_organization_name, - "managing_organization_ror_id": self.managing_organization_ror_id, - "publisher": self.publisher, - } - @staticmethod def from_data(dataset, data: dict): dataset_other = DatasetOther(dataset) diff --git a/model/dataset_metadata/dataset_readme.py b/model/dataset_metadata/dataset_readme.py new file mode 100644 index 00000000..7fa75b3a --- /dev/null +++ b/model/dataset_metadata/dataset_readme.py @@ -0,0 +1,31 @@ +from ..db import db + + +class DatasetReadme(db.Model): # type: ignore + def __init__(self, dataset): + self.dataset = dataset + self.content = "" + + __tablename__ = "dataset_readme" + content = 
db.Column(db.String, nullable=False) + + dataset_id = db.Column( + db.CHAR(36), db.ForeignKey("dataset.id"), primary_key=True, nullable=False + ) + dataset = db.relationship("Dataset", back_populates="dataset_readme") + + def to_dict(self): + return { + "id": self.dataset_id, + "content": self.content, + } + + @staticmethod + def from_data(dataset, data: dict): + dataset_readme = DatasetReadme(dataset) + dataset_readme.update(data) + return dataset_readme + + def update(self, data: dict): + self.content = data["content"] + self.dataset.touch_dataset() diff --git a/model/dataset_metadata/dataset_record_keys.py b/model/dataset_metadata/dataset_record_keys.py index 9f2d9b94..993af3f2 100644 --- a/model/dataset_metadata/dataset_record_keys.py +++ b/model/dataset_metadata/dataset_record_keys.py @@ -17,12 +17,6 @@ def __init__(self, dataset): dataset = db.relationship("Dataset", back_populates="dataset_record_keys") def to_dict(self): - return { - "type": self.key_type, - "details": self.key_details, - } - - def to_dict_metadata(self): return { "key_type": self.key_type, "key_details": self.key_details, diff --git a/model/dataset_metadata/dataset_related_item.py b/model/dataset_metadata/dataset_related_item.py index f95db200..579bed63 100644 --- a/model/dataset_metadata/dataset_related_item.py +++ b/model/dataset_metadata/dataset_related_item.py @@ -52,6 +52,7 @@ def to_dict(self): key=lambda creator: creator.created_at, ) creators = [c for c in sorted_contributors if c.creator] + contributors = [c for c in sorted_contributors if not c.creator] return { "id": self.id, @@ -96,39 +97,6 @@ def to_dict(self): ], } - def to_dict_metadata(self): - bigint_timestamp = self.dataset_related_item_other.publication_year - pub_year = "" - if bigint_timestamp: - unix_timestamp = bigint_timestamp / 1000 - datetime_obj = datetime.datetime.utcfromtimestamp(unix_timestamp) - pub_year = datetime_obj.strftime("%Y") - sorted_contributors = sorted( - self.dataset_related_item_contributor, - key=lambda creator: creator.created_at, - ) - - creators = [c for c in sorted_contributors if c.creator] - contributors = [c for c in sorted_contributors if not c.creator] - return { - "type": self.type, - "titles": [ - i.to_dict_metadata() - for i in self.dataset_related_item_title # type: ignore - ], - "identifiers": [ - i.to_dict_metadata() - for i in self.dataset_related_item_identifier # type: ignore - ], - "creators": [i.to_dict_metadata() for i in creators], # type: ignore - "contributors": [ - i.to_dict_metadata() for i in contributors # type: ignore - ], - # "publication_year": self.dataset_related_item_other.publication_year, - "publication_year": pub_year if bigint_timestamp else None, - "publisher": self.dataset_related_item_other.publisher, - } - @staticmethod def from_data(dataset, data: dict): dataset_related_item = DatasetRelatedItem(dataset) diff --git a/model/dataset_metadata/dataset_related_item_contributor.py b/model/dataset_metadata/dataset_related_item_contributor.py index 480757cb..b38b3651 100644 --- a/model/dataset_metadata/dataset_related_item_contributor.py +++ b/model/dataset_metadata/dataset_related_item_contributor.py @@ -36,14 +36,6 @@ def to_dict(self): "created_at": self.created_at, } - def to_dict_metadata(self): - return { - "id": self.id, - "name": self.name, - "name_type": self.name_type, - "contributor_type": self.contributor_type, - } - @staticmethod def from_data(dataset_related_item, data: dict, creator): contributor_ = DatasetRelatedItemContributor(dataset_related_item, creator) diff --git 
a/model/dataset_metadata/dataset_related_item_identifier.py b/model/dataset_metadata/dataset_related_item_identifier.py index 63d95f4b..48b2e548 100644 --- a/model/dataset_metadata/dataset_related_item_identifier.py +++ b/model/dataset_metadata/dataset_related_item_identifier.py @@ -38,13 +38,6 @@ def to_dict(self): "created_at": self.created_at, } - def to_dict_metadata(self): - return { - "id": self.id, - "identifier": self.identifier, - "type": self.type, - } - @staticmethod def from_data(dataset_related_item, data: dict): identifier_ = DatasetRelatedItemIdentifier(dataset_related_item) diff --git a/model/dataset_metadata/dataset_related_item_title.py b/model/dataset_metadata/dataset_related_item_title.py index fab997e5..2d4ea08d 100644 --- a/model/dataset_metadata/dataset_related_item_title.py +++ b/model/dataset_metadata/dataset_related_item_title.py @@ -32,13 +32,6 @@ def to_dict(self): "created_at": self.created_at, } - def to_dict_metadata(self): - return { - "id": self.id, - "title": self.title, - "type": self.type, - } - @staticmethod def from_data(dataset_related_item, data: dict): dataset_related_item_title = DatasetRelatedItemTitle(dataset_related_item) diff --git a/model/dataset_metadata/dataset_rights.py b/model/dataset_metadata/dataset_rights.py index 77103cde..27ad1a63 100644 --- a/model/dataset_metadata/dataset_rights.py +++ b/model/dataset_metadata/dataset_rights.py @@ -33,13 +33,6 @@ def to_dict(self): "created_at": self.created_at, } - def to_dict_metadata(self): - return { - "id": self.id, - "rights": self.rights, - "identifier": self.identifier, - } - @staticmethod def from_data(dataset, data: dict): dataset_rights = DatasetRights(dataset) diff --git a/model/dataset_metadata/dataset_subject.py b/model/dataset_metadata/dataset_subject.py index 0e6d5792..43ea560e 100644 --- a/model/dataset_metadata/dataset_subject.py +++ b/model/dataset_metadata/dataset_subject.py @@ -35,13 +35,6 @@ def to_dict(self): "created_at": self.created_at, } - def to_dict_metadata(self): - return { - "id": self.id, - "subject": self.subject, - "scheme": self.scheme, - } - @staticmethod def from_data(dataset, data: dict): dataset_subject = DatasetSubject(dataset) diff --git a/model/dataset_metadata/dataset_title.py b/model/dataset_metadata/dataset_title.py index f8426471..eff54c9f 100644 --- a/model/dataset_metadata/dataset_title.py +++ b/model/dataset_metadata/dataset_title.py @@ -30,13 +30,6 @@ def to_dict(self): "created_at": self.created_at, } - def to_dict_metadata(self): - return { - "id": self.id, - "title": self.title, - "type": self.type, - } - @staticmethod def from_data(dataset, data: dict): dataset_title = DatasetTitle(dataset) diff --git a/model/study.py b/model/study.py index 667b6f6f..a919bbae 100644 --- a/model/study.py +++ b/model/study.py @@ -172,53 +172,6 @@ def to_dict(self): "role": contributor_permission.to_dict()["role"], } - def to_dict_study_metadata(self): - # self.study_contact: Iterable = [] - primary = [ - i.to_dict_metadata() - for i in self.study_identification # type: ignore - if not i.secondary - ] - - return { - "arms": [i.to_dict_metadata() for i in self.study_arm], # type: ignore - "available_ipd": [ - i.to_dict_metadata() for i in self.study_available_ipd # type: ignore - ], - "contacts": [ - i.to_dict_metadata() for i in self.study_contact # type: ignore - ], - "description": self.study_description.to_dict_metadata(), - "design": self.study_design.to_dict(), - "eligibility": self.study_eligibility.to_dict_metadata(), - "primary_identifier": primary[0] if 
len(primary) else None, - "secondary_identifiers": [ - i.to_dict_metadata() - for i in self.study_identification # type: ignore - if i.secondary - ], - "interventions": [ - i.to_dict_metadata() for i in self.study_intervention # type: ignore - ], - "ipd_sharing": self.study_ipdsharing.to_dict_metadata(), - "links": [i.to_dict_metadata() for i in self.study_link], # type: ignore - "locations": [ - i.to_dict_metadata() for i in self.study_location # type: ignore - ], - "overall_officials": [ - i.to_dict_metadata() - for i in self.study_overall_official # type: ignore - ], - "references": [ - i.to_dict_metadata() for i in self.study_reference # type: ignore - ], - "sponsors": self.study_sponsors_collaborators.to_dict_metadata(), - "collaborators": self.study_sponsors_collaborators.collaborator_name, - "status": self.study_status.to_dict_metadata(), - "oversight": self.study_other.oversight_has_dmc, - "conditions": self.study_other.conditions, - } - @staticmethod def from_data(data: dict): """Creates a new study from a dictionary""" diff --git a/model/study_metadata/study_arm.py b/model/study_metadata/study_arm.py index db4609cf..01a9eed6 100644 --- a/model/study_metadata/study_arm.py +++ b/model/study_metadata/study_arm.py @@ -43,14 +43,6 @@ def to_dict(self): "created_at": self.created_at, } - def to_dict_metadata(self): - """Converts the study metadata to a dictionary""" - return { - "id": self.id, - "label": self.label, - "description": self.description, - } - @staticmethod def from_data(study: model.Study, data: dict): """Creates a new study from a dictionary""" diff --git a/model/study_metadata/study_available_ipd.py b/model/study_metadata/study_available_ipd.py index 5ae9913c..cd65d626 100644 --- a/model/study_metadata/study_available_ipd.py +++ b/model/study_metadata/study_available_ipd.py @@ -40,10 +40,6 @@ def to_dict(self): "created_at": self.created_at, } - def to_dict_metadata(self): - """Converts the study metadata to a dictionary""" - return {"identifier": self.identifier, "url": self.url} - @staticmethod def from_data(study: model.StudyArm, data: dict): """Creates a new study metadata from a dictionary""" diff --git a/model/study_metadata/study_contact.py b/model/study_metadata/study_contact.py index d2c5c5d3..4f943e6f 100644 --- a/model/study_metadata/study_contact.py +++ b/model/study_metadata/study_contact.py @@ -48,16 +48,6 @@ def to_dict(self): "created_at": self.created_at, } - def to_dict_metadata(self): - """Converts the study metadata to a dictionary""" - return { - "id": self.id, - "name": self.name, - "affiliation": self.affiliation, - "phone": self.phone, - "email_address": self.email_address, - } - @staticmethod def from_data(study: Study, data: dict, role, central_contact): """Creates a new study from a dictionary""" diff --git a/model/study_metadata/study_description.py b/model/study_metadata/study_description.py index f5115882..be3289da 100644 --- a/model/study_metadata/study_description.py +++ b/model/study_metadata/study_description.py @@ -32,10 +32,6 @@ def to_dict(self): "detailed_description": self.detailed_description, } - def to_dict_metadata(self): - """Converts the study metadata to a dictionary""" - return {"brief_summary": self.brief_summary} - @staticmethod def from_data(study: Study, data: dict): """Creates a new study from a dictionary""" diff --git a/model/study_metadata/study_eligibility.py b/model/study_metadata/study_eligibility.py index 81019b09..752a69f4 100644 --- a/model/study_metadata/study_eligibility.py +++ 
b/model/study_metadata/study_eligibility.py @@ -67,17 +67,6 @@ def to_dict(self): else None, } - def to_dict_metadata(self): - """Converts the study metadata to a dictionary""" - return { - "gender": self.gender, - "gender_based": self.gender_based, - "minimum_age_value": self.minimum_age_value, - "maximum_age_value": self.maximum_age_value, - "inclusion_criteria": self.inclusion_criteria, - "exclusion_criteria": self.exclusion_criteria, - } - @staticmethod def from_data(study: Study, data: dict): """Creates a new study from a dictionary""" diff --git a/model/study_metadata/study_identification.py b/model/study_metadata/study_identification.py index 299f20bd..f6373bf1 100644 --- a/model/study_metadata/study_identification.py +++ b/model/study_metadata/study_identification.py @@ -41,14 +41,6 @@ def to_dict(self): "created_at": self.created_at, } - def to_dict_metadata(self): - """Converts the study metadata to a dictionary""" - return { - "identifier": self.identifier, - "identifier_type": self.identifier_type, - "id": self.id, - } - @staticmethod def from_data(study: Study, data: dict, secondary): """Creates a new study from a dictionary""" diff --git a/model/study_metadata/study_intervention.py b/model/study_metadata/study_intervention.py index bb946cd2..0230b832 100644 --- a/model/study_metadata/study_intervention.py +++ b/model/study_metadata/study_intervention.py @@ -45,14 +45,6 @@ def to_dict(self): "created_at": self.created_at, } - def to_dict_metadata(self): - """Converts the study metadata to a dictionary""" - return { - "id": self.id, - "type": self.type, - "name": self.name, - } - @staticmethod def from_data(study: Study, data: dict): """Creates a new study from a dictionary""" diff --git a/model/study_metadata/study_ipdsharing.py b/model/study_metadata/study_ipdsharing.py index 9152959c..9d7868bc 100644 --- a/model/study_metadata/study_ipdsharing.py +++ b/model/study_metadata/study_ipdsharing.py @@ -46,13 +46,6 @@ def to_dict(self): "ipd_sharing_url": self.ipd_sharing_url, } - def to_dict_metadata(self): - """Converts the study metadata to a dictionary""" - return { - "ipd_sharing": self.ipd_sharing, - "ipd_sharing_info_type_list": self.ipd_sharing_info_type_list, - } - @staticmethod def from_data(study: Study, data: dict): """Creates a new study from a dictionary""" diff --git a/model/study_metadata/study_link.py b/model/study_metadata/study_link.py index aa3ba44e..946704d2 100644 --- a/model/study_metadata/study_link.py +++ b/model/study_metadata/study_link.py @@ -36,14 +36,6 @@ def to_dict(self): "created_at": self.created_at, } - def to_dict_metadata(self): - """Converts the study metadata to a dictionary""" - return { - "title": self.title, - "url": self.url, - "id": self.id, - } - @staticmethod def from_data(study: Study, data: dict): """Creates a new study from a dictionary""" diff --git a/model/study_metadata/study_location.py b/model/study_metadata/study_location.py index 27ac6476..06e640d6 100644 --- a/model/study_metadata/study_location.py +++ b/model/study_metadata/study_location.py @@ -44,15 +44,6 @@ def to_dict(self): "created_at": self.created_at, } - def to_dict_metadata(self): - """Converts the study metadata to a dictionary""" - return { - "id": self.id, - "facility": self.facility, - "city": self.city, - "country": self.country, - } - @staticmethod def from_data(study: Study, data: dict): """Creates a new study from a dictionary""" diff --git a/model/study_metadata/study_other.py b/model/study_metadata/study_other.py index 237d5b98..f66758e9 100644 --- 
a/model/study_metadata/study_other.py +++ b/model/study_metadata/study_other.py @@ -41,13 +41,6 @@ def to_dict(self): "size": self.size, } - def to_dict_metadata(self): - """Converts the study metadata to a dictionary""" - return { - "oversight_has_dmc": self.oversight_has_dmc, - "conditions": self.conditions, - } - @staticmethod def from_data(study: Study, data: dict): """Creates a new study from a dictionary""" diff --git a/model/study_metadata/study_overall_official.py b/model/study_metadata/study_overall_official.py index 069f3099..e173cd0b 100644 --- a/model/study_metadata/study_overall_official.py +++ b/model/study_metadata/study_overall_official.py @@ -38,14 +38,6 @@ def to_dict(self): "created_at": self.created_at, } - def to_dict_metadata(self): - """Converts the study metadata to a dictionary""" - return { - "name": self.name, - "role": self.role, - "affiliation": self.affiliation, - } - @staticmethod def from_data(study: Study, data: dict): """Creates a new study from a dictionary""" diff --git a/model/study_metadata/study_reference.py b/model/study_metadata/study_reference.py index cce05886..113af7b0 100644 --- a/model/study_metadata/study_reference.py +++ b/model/study_metadata/study_reference.py @@ -38,14 +38,6 @@ def to_dict(self): "created_at": self.created_at, } - def to_dict_metadata(self): - """Converts the study metadata to a dictionary""" - return { - "id": self.id, - "identifier": self.identifier, - "citation": self.citation, - } - @staticmethod def from_data(study: Study, data: dict): """Creates a new study from a dictionary""" diff --git a/model/study_metadata/study_sponsors_collaborators.py b/model/study_metadata/study_sponsors_collaborators.py index 9f916611..a8ab84f5 100644 --- a/model/study_metadata/study_sponsors_collaborators.py +++ b/model/study_metadata/study_sponsors_collaborators.py @@ -45,14 +45,6 @@ def to_dict(self): "lead_sponsor_name": self.lead_sponsor_name, } - def to_dict_metadata(self): - """Converts the study metadata to a dictionary""" - return { - "responsible_party_type": self.responsible_party_type, - "responsible_party_investigator_name": self.responsible_party_investigator_name, - "lead_sponsor_name": self.lead_sponsor_name, - } - @staticmethod def from_data(study: Study, data: dict): """Creates a new study from a dictionary""" diff --git a/model/study_metadata/study_status.py b/model/study_metadata/study_status.py index dd1ffe78..24920e32 100644 --- a/model/study_metadata/study_status.py +++ b/model/study_metadata/study_status.py @@ -43,13 +43,6 @@ def to_dict(self): "completion_date_type": self.completion_date_type, } - def to_dict_metadata(self): - """Converts the study metadata to a dictionary""" - return { - "overall_status": self.overall_status, - "start_date": self.start_date, - } - @staticmethod def from_data(study: Study, data: dict): """Creates a new study from a dictionary""" diff --git a/model/version.py b/model/version.py index 3aa3bb5c..9cff0264 100644 --- a/model/version.py +++ b/model/version.py @@ -4,7 +4,6 @@ from sqlalchemy import Table -import model from model.dataset import Dataset from .db import db @@ -22,7 +21,6 @@ def __init__(self, dataset): self.dataset = dataset self.id = str(uuid.uuid4()) self.created_at = datetime.datetime.now(timezone.utc).timestamp() - self.version_readme = model.VersionReadme(self) __tablename__ = "version" id = db.Column(db.CHAR(36), primary_key=True) diff --git a/model/version_readme.py b/model/version_readme.py index c04bd1b5..ec657f54 100644 --- a/model/version_readme.py +++ 
b/model/version_readme.py @@ -2,10 +2,6 @@ class VersionReadme(db.Model): # type: ignore - def __init__(self, version): - self.version = version - self.content = "" - __tablename__ = "version_readme" content = db.Column(db.String, nullable=True) @@ -16,14 +12,14 @@ def __init__(self, version): def to_dict(self): return { - "readme": self.content, + "content": self.content, } @staticmethod - def from_data(version, data: dict): - readme = VersionReadme(version) - readme.update(data) - return readme + def from_data(data: dict): + user = VersionReadme() + user.update(data) + return user def update(self, data: dict): - self.content = data["readme"] + self.content = data["content"] diff --git a/poetry.lock b/poetry.lock index b9e4bb35..e322ea82 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,26 +1,5 @@ # This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. -[[package]] -name = "alembic" -version = "1.12.1" -description = "A database migration tool for SQLAlchemy." -optional = false -python-versions = ">=3.7" -files = [ - {file = "alembic-1.12.1-py3-none-any.whl", hash = "sha256:47d52e3dfb03666ed945becb723d6482e52190917fdb47071440cfdba05d92cb"}, - {file = "alembic-1.12.1.tar.gz", hash = "sha256:bca5877e9678b454706347bc10b97cb7d67f300320fa5c3a94423e8266e2823f"}, -] - -[package.dependencies] -importlib-metadata = {version = "*", markers = "python_version < \"3.9\""} -importlib-resources = {version = "*", markers = "python_version < \"3.9\""} -Mako = "*" -SQLAlchemy = ">=1.3.0" -typing-extensions = ">=4" - -[package.extras] -tz = ["python-dateutil"] - [[package]] name = "aniso8601" version = "9.0.1" @@ -679,6 +658,51 @@ docopt = ">=0.6" minilog = ">=2.0" requests = ">=2.28,<3.0" +[[package]] +name = "cryptography" +version = "41.0.5" +description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "cryptography-41.0.5-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:da6a0ff8f1016ccc7477e6339e1d50ce5f59b88905585f77193ebd5068f1e797"}, + {file = "cryptography-41.0.5-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:b948e09fe5fb18517d99994184854ebd50b57248736fd4c720ad540560174ec5"}, + {file = "cryptography-41.0.5-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d38e6031e113b7421db1de0c1b1f7739564a88f1684c6b89234fbf6c11b75147"}, + {file = "cryptography-41.0.5-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e270c04f4d9b5671ebcc792b3ba5d4488bf7c42c3c241a3748e2599776f29696"}, + {file = "cryptography-41.0.5-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:ec3b055ff8f1dce8e6ef28f626e0972981475173d7973d63f271b29c8a2897da"}, + {file = "cryptography-41.0.5-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:7d208c21e47940369accfc9e85f0de7693d9a5d843c2509b3846b2db170dfd20"}, + {file = "cryptography-41.0.5-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:8254962e6ba1f4d2090c44daf50a547cd5f0bf446dc658a8e5f8156cae0d8548"}, + {file = "cryptography-41.0.5-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:a48e74dad1fb349f3dc1d449ed88e0017d792997a7ad2ec9587ed17405667e6d"}, + {file = "cryptography-41.0.5-cp37-abi3-win32.whl", hash = "sha256:d3977f0e276f6f5bf245c403156673db103283266601405376f075c849a0b936"}, + {file = "cryptography-41.0.5-cp37-abi3-win_amd64.whl", hash = "sha256:73801ac9736741f220e20435f84ecec75ed70eda90f781a148f1bad546963d81"}, + {file = "cryptography-41.0.5-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:3be3ca726e1572517d2bef99a818378bbcf7d7799d5372a46c79c29eb8d166c1"}, + {file = "cryptography-41.0.5-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:e886098619d3815e0ad5790c973afeee2c0e6e04b4da90b88e6bd06e2a0b1b72"}, + {file = "cryptography-41.0.5-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:573eb7128cbca75f9157dcde974781209463ce56b5804983e11a1c462f0f4e88"}, + {file = "cryptography-41.0.5-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:0c327cac00f082013c7c9fb6c46b7cc9fa3c288ca702c74773968173bda421bf"}, + {file = "cryptography-41.0.5-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:227ec057cd32a41c6651701abc0328135e472ed450f47c2766f23267b792a88e"}, + {file = "cryptography-41.0.5-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:22892cc830d8b2c89ea60148227631bb96a7da0c1b722f2aac8824b1b7c0b6b8"}, + {file = "cryptography-41.0.5-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:5a70187954ba7292c7876734183e810b728b4f3965fbe571421cb2434d279179"}, + {file = "cryptography-41.0.5-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:88417bff20162f635f24f849ab182b092697922088b477a7abd6664ddd82291d"}, + {file = "cryptography-41.0.5-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:c707f7afd813478e2019ae32a7c49cd932dd60ab2d2a93e796f68236b7e1fbf1"}, + {file = "cryptography-41.0.5-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:580afc7b7216deeb87a098ef0674d6ee34ab55993140838b14c9b83312b37b86"}, + {file = "cryptography-41.0.5-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:fba1e91467c65fe64a82c689dc6cf58151158993b13eb7a7f3f4b7f395636723"}, + {file = "cryptography-41.0.5-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:0d2a6a598847c46e3e321a7aef8af1436f11c27f1254933746304ff014664d84"}, + {file = "cryptography-41.0.5.tar.gz", hash = 
"sha256:392cb88b597247177172e02da6b7a63deeff1937fa6fec3bbf902ebd75d97ec7"}, +] + +[package.dependencies] +cffi = ">=1.12" + +[package.extras] +docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.1.1)"] +docstest = ["pyenchant (>=1.6.11)", "sphinxcontrib-spelling (>=4.0.1)", "twine (>=1.12.0)"] +nox = ["nox"] +pep8test = ["black", "check-sdist", "mypy", "ruff"] +sdist = ["build"] +ssh = ["bcrypt (>=3.1.5)"] +test = ["pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] +test-randomorder = ["pytest-randomly"] + [[package]] name = "debugpy" version = "1.6.7" @@ -1649,6 +1673,19 @@ files = [ {file = "jupyterlab_widgets-3.0.8.tar.gz", hash = "sha256:d428ab97b8d87cc7c54cbf37644d6e0f0e662f23876e05fa460a73ec3257252a"}, ] +[[package]] +name = "jwt" +version = "1.3.1" +description = "JSON Web Token library for Python 3." +optional = false +python-versions = ">= 3.6" +files = [ + {file = "jwt-1.3.1-py3-none-any.whl", hash = "sha256:61c9170f92e736b530655e75374681d4fcca9cfa8763ab42be57353b2b203494"}, +] + +[package.dependencies] +cryptography = ">=3.1,<3.4.0 || >3.4.0" + [[package]] name = "lazy-object-proxy" version = "1.9.0" @@ -1694,25 +1731,6 @@ files = [ {file = "lazy_object_proxy-1.9.0-cp39-cp39-win_amd64.whl", hash = "sha256:db1c1722726f47e10e0b5fdbf15ac3b8adb58c091d12b3ab713965795036985f"}, ] -[[package]] -name = "mako" -version = "1.3.0" -description = "A super-fast templating language that borrows the best ideas from the existing templating languages." -optional = false -python-versions = ">=3.8" -files = [ - {file = "Mako-1.3.0-py3-none-any.whl", hash = "sha256:57d4e997349f1a92035aa25c17ace371a4213f2ca42f99bee9a602500cfd54d9"}, - {file = "Mako-1.3.0.tar.gz", hash = "sha256:e3a9d388fd00e87043edbe8792f45880ac0114e9c4adc69f6e9bfb2c55e3b11b"}, -] - -[package.dependencies] -MarkupSafe = ">=0.9.2" - -[package.extras] -babel = ["Babel"] -lingua = ["lingua"] -testing = ["pytest"] - [[package]] name = "markdown" version = "3.3.7" @@ -2642,7 +2660,6 @@ files = [ {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, - {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, @@ -2650,15 +2667,8 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, - {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, - {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, - {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, - {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, @@ -2675,7 +2685,6 @@ files = [ {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, - {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, @@ -2683,7 +2692,6 @@ files = [ {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, - {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = 
"sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, @@ -3258,6 +3266,7 @@ test = ["argcomplete (>=2.0)", "pre-commit", "pytest", "pytest-mock"] name = "types-python-dateutil" version = "2.8.19.14" description = "Typing stubs for python-dateutil" +category = "dev" optional = false python-versions = "*" files = [ @@ -3569,4 +3578,4 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p [metadata] lock-version = "2.0" python-versions = "^3.8.16" -content-hash = "6baf6175b51ec48a4653b31437eb4d02ff6385d8f973566192dbd31cb9c8c586" +content-hash = "e3e1b2d0645e5cd7ad0281091d65e85b411eab0ddd7c475762e908bf9c10bdb4" diff --git a/pyproject.toml b/pyproject.toml index 60636010..2dee3a88 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -53,7 +53,6 @@ python-dotenv = "^1.0.0" flask-bcrypt = "^1.0.1" pyjwt = "^2.8.0" email-validator = "^2.0.0.post2" -alembic = "^1.12.1" [tool.poetry.group.dev.dependencies] diff --git a/sql/init.sql b/sql/init.sql index 1127e19a..bd8d6c79 100644 --- a/sql/init.sql +++ b/sql/init.sql @@ -243,7 +243,19 @@ INSERT INTO "dataset_other" ("id", "language", "managing_organization_name", "ma /*!40000 ALTER TABLE "dataset_other" ENABLE KEYS */; -- Dumping structure for table public.dataset_readme +CREATE TABLE IF NOT EXISTS "dataset_readme" ( + "id" CHAR(36) NOT NULL, + "content" VARCHAR NOT NULL, + "dataset_id" CHAR(36) NOT NULL, + PRIMARY KEY ("id"), + CONSTRAINT "dataset_readme_dataset_id_fkey" FOREIGN KEY ("dataset_id") REFERENCES "dataset" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION +); +-- Dumping data for table public.dataset_readme: 1 rows +/*!40000 ALTER TABLE "dataset_readme" DISABLE KEYS */; +INSERT INTO "dataset_readme" ("id", "content", "dataset_id") VALUES + ('6473a133-af27-4b6c-a8a0-3fc850d3ab91', 'none', '00000000-0000-0000-0000-000000000001'); +/*!40000 ALTER TABLE "dataset_readme" ENABLE KEYS */; -- Dumping structure for table public.dataset_record_keys CREATE TABLE IF NOT EXISTS "dataset_record_keys" ( diff --git a/sql/init_timezones.sql b/sql/init_timezones.sql index cfcb1cb1..9edd316b 100644 --- a/sql/init_timezones.sql +++ b/sql/init_timezones.sql @@ -297,9 +297,19 @@ INSERT INTO "dataset_other" ("id", "language", "managing_organization_name", "ma /*!40000 ALTER TABLE "dataset_other" ENABLE KEYS */; -- Dumping structure for table public.dataset_readme +CREATE TABLE IF NOT EXISTS "dataset_readme" ( + "id" CHAR(36) NOT NULL, + "content" VARCHAR NOT NULL, + "dataset_id" CHAR(36) NOT NULL, + PRIMARY KEY ("id"), + CONSTRAINT "dataset_readme_dataset_id_fkey" FOREIGN KEY ("dataset_id") REFERENCES "dataset" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION +); -- Dumping data for table public.dataset_readme: -1 rows /*!40000 ALTER TABLE "dataset_readme" DISABLE KEYS */; +INSERT INTO "dataset_readme" ("id", "content", "dataset_id") VALUES + ('6473a133-af27-4b6c-a8a0-3fc850d3ab91', 'none', '00000000-0000-0000-0000-000000000001'); +/*!40000 ALTER TABLE "dataset_readme" ENABLE KEYS */; -- Dumping structure for table public.dataset_record_keys CREATE TABLE IF NOT EXISTS "dataset_record_keys" ( diff --git 
a/tests/functional/test_study_dataset_metadata_api.py b/tests/functional/test_study_dataset_metadata_api.py index 13099053..50bc66b3 100644 --- a/tests/functional/test_study_dataset_metadata_api.py +++ b/tests/functional/test_study_dataset_metadata_api.py @@ -50,23 +50,6 @@ def test_put_dataset_access_metadata(_logged_in_client): # ------------------- ALTERNATIVE IDENTIFIER METADATA ------------------- # -def test_get_alternative_identifier(_logged_in_client): - """ - Given a Flask application configured for testing and a study ID - When the '/study/{study_id}/dataset/{dataset_id}/metadata/alternative-identifier' - endpoint is requested (GET) - Then check that the response is valid and retrieves the dataset alternative identifier content - """ - study_id = pytest.global_study_id["id"] # type: ignore - dataset_id = pytest.global_dataset_id - - response = _logged_in_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/alternative-identifier" - ) - - assert response.status_code == 200 - - def test_post_alternative_identifier(_logged_in_client): """ Given a Flask application configured for testing and a study ID @@ -82,7 +65,7 @@ def test_post_alternative_identifier(_logged_in_client): json=[ { "identifier": "identifier test", - "type": "ARK", + "type": "ark", } ], ) @@ -92,7 +75,24 @@ def test_post_alternative_identifier(_logged_in_client): pytest.global_alternative_identifier_id = response_data[0]["id"] assert response_data[0]["identifier"] == "identifier test" - assert response_data[0]["type"] == "ARK" + assert response_data[0]["type"] == "ark" + + +def test_get_alternative_identifier(_logged_in_client): + """ + Given a Flask application configured for testing and a study ID + When the '/study/{study_id}/dataset/{dataset_id}/metadata/alternative-identifier' + endpoint is requested (GET) + Then check that the response is valid and retrieves the dataset alternative identifier content + """ + study_id = pytest.global_study_id["id"] # type: ignore + dataset_id = pytest.global_dataset_id + + response = _logged_in_client.get( + f"/study/{study_id}/dataset/{dataset_id}/metadata/alternative-identifier" + ) + + assert response.status_code == 200 def test_delete_alternative_identifier(_logged_in_client): @@ -685,6 +685,49 @@ def test_put_dataset_publisher_metadata(_logged_in_client): ) +# ------------------- README METADATA ------------------- # +def test_get_dataset_readme_metadata(_logged_in_client): + """ + Given a Flask application configured for testing and a study ID and dataset ID + When the '/study/{study_id}/dataset/{dataset_id}/metadata/readme' + endpoint is requested (GET) + Then check that the response is valid and retrieves the dataset + readme metadata content + """ + study_id = pytest.global_study_id["id"] # type: ignore + dataset_id = pytest.global_dataset_id + + response = _logged_in_client.get( + f"/study/{study_id}/dataset/{dataset_id}/metadata/readme" + ) + + assert response.status_code == 200 + + +def test_put_dataset_readme_metadata(_logged_in_client): + """ + Given a Flask application configured for testing and a study ID and dataset ID + When the '/study/{study_id}/dataset/{dataset_id}/metadata/readme' + endpoint is requested (PUT) + Then check that the response is valid and updates the dataset + readme metadata content + """ + study_id = pytest.global_study_id["id"] # type: ignore + dataset_id = pytest.global_dataset_id + + response = _logged_in_client.put( + f"/study/{study_id}/dataset/{dataset_id}/metadata/readme", + json={ + "content": "This is the readme 
content", + }, + ) + + assert response.status_code == 200 + response_data = json.loads(response.data) + + assert response_data["content"] == "This is the readme content" + + # ------------------- RECORD KEYS METADATA ------------------- # def test_get_dataset_record_keys_metadata(_logged_in_client): """ @@ -723,8 +766,8 @@ def test_put_dataset_record_keys_metadata(_logged_in_client): assert response.status_code == 201 response_data = json.loads(response.data) - assert response_data["type"] == "Record Type" - assert response_data["details"] == "Details for Record Keys" + assert response_data["key_type"] == "Record Type" + assert response_data["key_details"] == "Details for Record Keys" # ------------------- RELATED ITEM METADATA ------------------- # @@ -749,7 +792,7 @@ def test_get_dataset_related_item_metadata(_logged_in_client): def test_post_dataset_related_item_metadata(_logged_in_client): """ Given a Flask application configured for testing and a study ID and dataset ID - When the '/study/{study_id}/dataset/{dataset_id}/metadata/related-item' + When the '/study/{study_id}/dataset' endpoint is requested (POST) Then check that the response is valid and creates the dataset related item metadata content @@ -777,7 +820,7 @@ def test_post_dataset_related_item_metadata(_logged_in_client): "metadata_scheme": "Metadata Scheme", "scheme_type": "Scheme Type", "scheme_uri": "Scheme URI", - "type": "ARK", + "type": "ark", } ], "issue": "Issue", @@ -819,7 +862,7 @@ def test_post_dataset_related_item_metadata(_logged_in_client): assert response_data[0]["identifiers"][0]["metadata_scheme"] == "Metadata Scheme" assert response_data[0]["identifiers"][0]["scheme_type"] == "Scheme Type" assert response_data[0]["identifiers"][0]["scheme_uri"] == "Scheme URI" - assert response_data[0]["identifiers"][0]["type"] == "ARK" + assert response_data[0]["identifiers"][0]["type"] == "ark" assert response_data[0]["issue"] == "Issue" assert response_data[0]["last_page"] == "Last Page" assert response_data[0]["number_type"] == "Number Type"